Main Function
The CNN training code below is commented out; its console output was saved to a text file, which is loaded and displayed in the following chunk. This avoids re-running the model training, which would make knitting the document take far too long. The accuracy and loss curves were saved as images and are displayed in the Analysis section.
# Main function to train a neural network model on the CIFAR-100 dataset
# def main(argv):
#
# random_seed = 1
# torch.manual_seed(random_seed)
#
# # Path to the folder containing CIFAR-100 dataset
# ROOT_PATH = '/media/rj/New Volume/Northeastern University/Semester-2/DS 5220 - SMLT/Projects/Final Project/Code/'
#
# # Define batch size
# BATCH_SIZE = 128
#
# # Define data transformations
# train_transform = transforms.Compose([
# transforms.RandomCrop(32, padding=4, padding_mode='reflect'),
# transforms.RandomHorizontalFlip(),
# transforms.RandomRotation(10),
# transforms.ColorJitter(brightness=0.2, contrast=0.2, saturation=0.2, hue=0.1), # Apply color jittering
# transforms.ToTensor(),
# transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
# ])
#
# test_transform = transforms.Compose([
# transforms.ToTensor(),
# transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
# ])
#
# # Load CIFAR-100 training and evaluation datasets
# train_dataset = CIFAR100(root=ROOT_PATH, download=False, train=True, transform=train_transform)
# eval_dataset = CIFAR100(root=ROOT_PATH, download=False, train=False, transform=test_transform)
#
# # Create data loaders for training and evaluation
# train_data_loader = DataLoader(dataset=train_dataset, num_workers=2, batch_size=BATCH_SIZE, shuffle=True, pin_memory=True)
# eval_data_loader = DataLoader(dataset=eval_dataset, num_workers=2, batch_size=BATCH_SIZE*2, shuffle=False, pin_memory=True)
#
# # Instantiate the model and move it to the appropriate device
# model, device = create_model(num_classes=100)
#
# # Define loss function and optimizer
# criterion = nn.CrossEntropyLoss()
#
# # Define L2 regularization strength
# w_decay = 0.001
# optimizer = optim.Adam(model.parameters(), lr=0.001, weight_decay=w_decay)
#
# modelsavepath = "./models/model.pth"
# optimizersavepath = "./models/optimizer.pth"
# epochs = 50
#
# # Train the model
# #train_model(model, train_data_loader, eval_data_loader, optimizer, criterion, epochs)
#
# #total_steps = len(train_data_loader)*epochs
# steps_per_epoch = len(train_data_loader)
# # Define learning rate scheduler
# scheduler = optim.lr_scheduler.OneCycleLR(optimizer, max_lr=0.001, epochs=epochs, steps_per_epoch=steps_per_epoch)
#
#
# # Model Training Phase
# loss_train, loss_acc, loss_val, acc_val = train_model(
# train_data_loader, eval_data_loader, device, model,
# criterion, optimizer, scheduler, epochs
# )
#
# # Save model checkpoint and optimizer state
# torch.save(model.state_dict(), modelsavepath)
# torch.save(optimizer.state_dict(), optimizersavepath)
#
# # Plot the accuracy and loss curves
# visualize_training(loss_train, loss_val, loss_acc, acc_val, epochs)
#
# return
#
# if __name__ == "__main__":
# main(sys.argv)
# Path to the text file containing the saved CNN training log
file_path = "/media/rj/New Volume/Northeastern University/Semester-2/DS 5220 - SMLT/Projects/Final Project/Code/CNNResults.txt"

# Stream the file line by line instead of materializing it all at once with
# readlines() — same printed output, lower memory use. The encoding is given
# explicitly so the chunk behaves identically regardless of the platform's
# locale default. NOTE(review): assumes the log was saved as UTF-8 — confirm.
with open(file_path, 'r', encoding='utf-8') as file:
    for line in file:
        # strip() removes the trailing newline plus any leading/trailing whitespace
        print(line.strip())
## cuda
## Training epoch 1
## Training Loss: 3.681, Training Accuracy: 0.146, Test Loss: 3.048, Test Accuracy: 0.257, Time: 65.32s
## Training epoch 2
## Training Loss: 3.058, Training Accuracy: 0.247, Test Loss: 2.702, Test Accuracy: 0.320, Time: 65.42s
## Training epoch 3
## Training Loss: 2.721, Training Accuracy: 0.310, Test Loss: 2.354, Test Accuracy: 0.379, Time: 65.68s
## Training epoch 4
## Training Loss: 2.477, Training Accuracy: 0.362, Test Loss: 2.213, Test Accuracy: 0.421, Time: 65.86s
## Training epoch 5
## Training Loss: 2.295, Training Accuracy: 0.402, Test Loss: 2.033, Test Accuracy: 0.459, Time: 66.00s
## Training epoch 6
## Training Loss: 2.148, Training Accuracy: 0.435, Test Loss: 2.007, Test Accuracy: 0.469, Time: 66.07s
## Training epoch 7
## Training Loss: 2.019, Training Accuracy: 0.463, Test Loss: 1.845, Test Accuracy: 0.501, Time: 66.17s
## Training epoch 8
## Training Loss: 1.909, Training Accuracy: 0.488, Test Loss: 1.801, Test Accuracy: 0.514, Time: 66.30s
## Training epoch 9
## Training Loss: 1.806, Training Accuracy: 0.512, Test Loss: 1.748, Test Accuracy: 0.529, Time: 66.42s
## Training epoch 10
## Training Loss: 1.718, Training Accuracy: 0.532, Test Loss: 1.661, Test Accuracy: 0.541, Time: 66.69s
## Training epoch 11
## Training Loss: 1.633, Training Accuracy: 0.553, Test Loss: 1.645, Test Accuracy: 0.546, Time: 66.82s
## Training epoch 12
## Training Loss: 1.554, Training Accuracy: 0.574, Test Loss: 1.576, Test Accuracy: 0.575, Time: 67.53s
## Training epoch 13
## Training Loss: 1.492, Training Accuracy: 0.586, Test Loss: 1.580, Test Accuracy: 0.572, Time: 67.98s
## Training epoch 14
## Training Loss: 1.420, Training Accuracy: 0.607, Test Loss: 1.546, Test Accuracy: 0.578, Time: 67.97s
## Training epoch 15
## Training Loss: 1.364, Training Accuracy: 0.620, Test Loss: 1.470, Test Accuracy: 0.589, Time: 67.86s
## Training epoch 16
## Training Loss: 1.296, Training Accuracy: 0.638, Test Loss: 1.472, Test Accuracy: 0.588, Time: 67.36s
## Training epoch 17
## Training Loss: 1.246, Training Accuracy: 0.650, Test Loss: 1.475, Test Accuracy: 0.593, Time: 67.66s
## Training epoch 18
## Training Loss: 1.195, Training Accuracy: 0.665, Test Loss: 1.437, Test Accuracy: 0.599, Time: 67.89s
## Training epoch 19
## Training Loss: 1.151, Training Accuracy: 0.674, Test Loss: 1.420, Test Accuracy: 0.609, Time: 68.22s
## Training epoch 20
## Training Loss: 1.089, Training Accuracy: 0.691, Test Loss: 1.412, Test Accuracy: 0.614, Time: 68.82s
## Training epoch 21
## Training Loss: 1.050, Training Accuracy: 0.702, Test Loss: 1.348, Test Accuracy: 0.622, Time: 69.02s
## Training epoch 22
## Training Loss: 1.006, Training Accuracy: 0.714, Test Loss: 1.367, Test Accuracy: 0.625, Time: 69.38s
## Training epoch 23
## Training Loss: 0.968, Training Accuracy: 0.721, Test Loss: 1.311, Test Accuracy: 0.635, Time: 69.12s
## Training epoch 24
## Training Loss: 0.927, Training Accuracy: 0.735, Test Loss: 1.338, Test Accuracy: 0.630, Time: 69.19s
## Training epoch 25
## Training Loss: 0.890, Training Accuracy: 0.744, Test Loss: 1.319, Test Accuracy: 0.631, Time: 69.23s
## Training epoch 26
## Training Loss: 0.845, Training Accuracy: 0.758, Test Loss: 1.329, Test Accuracy: 0.633, Time: 69.13s
## Training epoch 27
## Training Loss: 0.813, Training Accuracy: 0.767, Test Loss: 1.321, Test Accuracy: 0.635, Time: 69.13s
## Training epoch 28
## Training Loss: 0.776, Training Accuracy: 0.778, Test Loss: 1.326, Test Accuracy: 0.633, Time: 69.11s
## Training epoch 29
## Training Loss: 0.745, Training Accuracy: 0.787, Test Loss: 1.317, Test Accuracy: 0.640, Time: 69.09s
## Training epoch 30
## Training Loss: 0.715, Training Accuracy: 0.794, Test Loss: 1.308, Test Accuracy: 0.640, Time: 69.07s
## Training epoch 31
## Training Loss: 0.679, Training Accuracy: 0.806, Test Loss: 1.258, Test Accuracy: 0.651, Time: 69.09s
## Training epoch 32
## Training Loss: 0.652, Training Accuracy: 0.812, Test Loss: 1.262, Test Accuracy: 0.653, Time: 69.05s
## Training epoch 33
## Training Loss: 0.626, Training Accuracy: 0.822, Test Loss: 1.283, Test Accuracy: 0.650, Time: 69.05s
## Training epoch 34
## Training Loss: 0.605, Training Accuracy: 0.828, Test Loss: 1.278, Test Accuracy: 0.652, Time: 69.05s
## Training epoch 35
## Training Loss: 0.577, Training Accuracy: 0.834, Test Loss: 1.287, Test Accuracy: 0.648, Time: 69.05s
## Training epoch 36
## Training Loss: 0.548, Training Accuracy: 0.844, Test Loss: 1.276, Test Accuracy: 0.654, Time: 69.06s
## Training epoch 37
## Training Loss: 0.528, Training Accuracy: 0.852, Test Loss: 1.251, Test Accuracy: 0.659, Time: 69.05s
## Training epoch 38
## Training Loss: 0.511, Training Accuracy: 0.856, Test Loss: 1.258, Test Accuracy: 0.660, Time: 69.05s
## Training epoch 39
## Training Loss: 0.494, Training Accuracy: 0.861, Test Loss: 1.314, Test Accuracy: 0.652, Time: 69.04s
## Training epoch 40
## Training Loss: 0.464, Training Accuracy: 0.869, Test Loss: 1.297, Test Accuracy: 0.652, Time: 69.39s
## Training epoch 41
## Training Loss: 0.465, Training Accuracy: 0.869, Test Loss: 1.299, Test Accuracy: 0.655, Time: 69.05s
## Training epoch 42
## Training Loss: 0.432, Training Accuracy: 0.880, Test Loss: 1.278, Test Accuracy: 0.656, Time: 69.40s
## Training epoch 43
## Training Loss: 0.425, Training Accuracy: 0.882, Test Loss: 1.268, Test Accuracy: 0.660, Time: 69.06s
## Training epoch 44
## Training Loss: 0.404, Training Accuracy: 0.888, Test Loss: 1.261, Test Accuracy: 0.664, Time: 69.05s
## Training epoch 45
## Training Loss: 0.388, Training Accuracy: 0.893, Test Loss: 1.298, Test Accuracy: 0.659, Time: 69.06s
## Training epoch 46
## Training Loss: 0.376, Training Accuracy: 0.896, Test Loss: 1.253, Test Accuracy: 0.668, Time: 69.05s
## Training epoch 47
## Training Loss: 0.368, Training Accuracy: 0.898, Test Loss: 1.293, Test Accuracy: 0.658, Time: 69.06s
## Training epoch 48
## Training Loss: 0.350, Training Accuracy: 0.905, Test Loss: 1.247, Test Accuracy: 0.667, Time: 69.05s
## Training epoch 49
## Training Loss: 0.337, Training Accuracy: 0.908, Test Loss: 1.279, Test Accuracy: 0.661, Time: 69.42s
## Training epoch 50
## Training Loss: 0.330, Training Accuracy: 0.910, Test Loss: 1.276, Test Accuracy: 0.664, Time: 69.07s
## Training is complete.
## Average Training Loss: 1.111
## Average Test Loss: 1.505
## Average Training Accuracy: 0.699
## Average Test Accuracy: 0.594
## Average Training Time per Epoch: 68.23s